Brain_Tumor.ipynb
No Headings
The table of contents shows headings in notebooks and supported files.
- File
- Edit
- View
- Run
- Kernel
- Settings
- Help
Kernel status: Idle. Executed 2 cells. Elapsed time: 1982 seconds.
[3]:
!pip install tensorflow
Collecting tensorflow
[7]:
Selection deleted
import os
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns
from tensorflow.keras.models import Model
from tensorflow.keras.layers import GlobalAveragePooling2D, Dense, Dropout, Input
from tensorflow.keras.applications import EfficientNetB0
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.utils.class_weight import compute_class_weight
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.utils.multiclass import unique_labels
# === CONFIG ===
# Dataset root. Overridable via the BRAIN_TUMOR_DATA_DIR environment variable so
# the notebook is not tied to one machine's absolute path; the default preserves
# the original location.
DATA_DIR = os.environ.get(
    "BRAIN_TUMOR_DATA_DIR", "C:/Users/Rahila/Downloads/brain_tumor_dataset"
)
IMG_SIZE = (224, 224)  # EfficientNetB0's native input resolution
BATCH_SIZE = 32
NUM_CLASSES = 4  # must match the number of class subfolders under DATA_DIR
MODEL_PATH = "efficientnet_model.keras"
# Display names, indexed in the same order as the generator's class indices
# (flow_from_directory sorts subfolder names alphabetically).
CLASS_NAMES = ['Glioma', 'Meningioma', 'No Tumor', 'Pituitary']
# === DATA LOADERS ===
# BUG FIX: Keras EfficientNet models embed their own input normalization and
# expect raw pixel values in the [0, 255] range (see keras.applications
# EfficientNet docs). Applying rescale=1./255 here feeds the pretrained backbone
# inputs ~255x smaller than it was trained on, silently degrading the ImageNet
# features — so no rescale is applied.
#
# NOTE(review): the recorded run printed "Found ... belonging to 1 classes" for
# every split, so DATA_DIR likely lacks one subfolder per class. Verify the
# directory layout (train/val/test each containing one folder per tumor class);
# otherwise the "perfect" metrics below are meaningless.
train_gen = ImageDataGenerator(
    rotation_range=20,
    width_shift_range=0.1,
    height_shift_range=0.1,
    zoom_range=0.1,
    horizontal_flip=True,
    vertical_flip=True,  # NOTE(review): confirm vertical flips are anatomically valid for these MRI slices
).flow_from_directory(
    os.path.join(DATA_DIR, 'train'),
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='sparse',  # integer labels for sparse_categorical_crossentropy
)
# Validation: no augmentation, only resizing.
val_gen = ImageDataGenerator().flow_from_directory(
    os.path.join(DATA_DIR, 'val'),
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='sparse',
)
# Test: batch_size=1 and shuffle=False keep predictions aligned with test_gen.classes.
test_gen = ImageDataGenerator().flow_from_directory(
    os.path.join(DATA_DIR, 'test'),
    target_size=IMG_SIZE,
    batch_size=1,
    class_mode='sparse',
    shuffle=False,
)
# === CLASS WEIGHTS ===
# Compensate for class imbalance: under-represented classes get a proportionally
# larger loss weight, so the model is not dominated by the majority class.
labels = train_gen.classes
# Guard against the broken-dataset case visible in the recorded run: with a
# single class, 'balanced' weights are vacuous and all metrics are meaningless.
assert len(np.unique(labels)) == NUM_CLASSES, (
    f"Expected {NUM_CLASSES} classes but found {len(np.unique(labels))}; "
    "check that each split directory has one subfolder per class."
)
weight_values = compute_class_weight(
    class_weight='balanced',
    classes=np.unique(labels),
    y=labels,
)
# Keras expects a {class_index: weight} mapping for fit(class_weight=...).
class_weights = {index: weight for index, weight in enumerate(weight_values)}
# === MODEL ===
# Transfer learning: a frozen EfficientNetB0 backbone (ImageNet weights) with a
# small trainable classification head on top.
base_model = EfficientNetB0(
    include_top=False,
    weights='imagenet',
    input_tensor=Input(shape=(224, 224, 3)),
)
base_model.trainable = False  # freeze the backbone; only the head is trained

# Classification head: pooled backbone features -> dropout-regularized dense head.
head = GlobalAveragePooling2D()(base_model.output)
head = Dropout(0.5)(head)
head = Dense(128, activation='relu')(head)
head = Dropout(0.3)(head)
output = Dense(NUM_CLASSES, activation='softmax')(head)

model = Model(inputs=base_model.input, outputs=output)
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',  # integer labels from class_mode='sparse'
    metrics=['accuracy'],
)
Found 1695 images belonging to 1 classes. Found 502 images belonging to 1 classes. Found 246 images belonging to 1 classes.
C:\Users\Rahila\anaconda3\Lib\site-packages\keras\src\trainers\data_adapters\py_dataset_adapter.py:121: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored. self._warn_if_super_not_called()
Epoch 1/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 99s 2s/step - accuracy: 0.9070 - loss: 0.2737 - val_accuracy: 1.0000 - val_loss: 1.5559e-06 Epoch 2/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 2.3420e-05 - val_accuracy: 1.0000 - val_loss: 1.3177e-06 Epoch 3/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 1.6340e-05 - val_accuracy: 1.0000 - val_loss: 1.1949e-06 Epoch 4/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 3.4486e-05 - val_accuracy: 1.0000 - val_loss: 1.1814e-06 Epoch 5/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 2.5730e-05 - val_accuracy: 1.0000 - val_loss: 9.5819e-07 Epoch 6/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 1.8743e-05 - val_accuracy: 1.0000 - val_loss: 8.3684e-07 Epoch 7/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 2.0960e-05 - val_accuracy: 1.0000 - val_loss: 6.2169e-07 Epoch 8/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 82s 1s/step - accuracy: 1.0000 - loss: 1.3034e-05 - val_accuracy: 1.0000 - val_loss: 5.9605e-07 Epoch 9/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 77s 1s/step - accuracy: 1.0000 - loss: 1.1171e-05 - val_accuracy: 1.0000 - val_loss: 4.8942e-07 Epoch 10/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 1.0529e-05 - val_accuracy: 1.0000 - val_loss: 4.7684e-07 Epoch 11/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 5.5299e-06 - val_accuracy: 1.0000 - val_loss: 3.6285e-07 Epoch 12/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 77s 1s/step - accuracy: 1.0000 - loss: 5.4624e-06 - val_accuracy: 1.0000 - val_loss: 3.5763e-07 Epoch 13/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 5.0444e-06 - val_accuracy: 1.0000 - val_loss: 2.4079e-07 Epoch 14/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 1.2188e-05 - val_accuracy: 1.0000 - val_loss: 2.3842e-07 Epoch 15/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 7.1924e-06 - val_accuracy: 1.0000 - 
val_loss: 2.3842e-07 Epoch 16/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 74s 1s/step - accuracy: 1.0000 - loss: 8.6971e-06 - val_accuracy: 1.0000 - val_loss: 2.3842e-07 Epoch 17/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 163s 3s/step - accuracy: 1.0000 - loss: 3.8749e-06 - val_accuracy: 1.0000 - val_loss: 2.3842e-07 Epoch 18/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 77s 1s/step - accuracy: 1.0000 - loss: 5.4178e-06 - val_accuracy: 1.0000 - val_loss: 1.2040e-07 Epoch 19/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 78s 1s/step - accuracy: 1.0000 - loss: 1.8848e-06 - val_accuracy: 1.0000 - val_loss: 1.1398e-07 Epoch 20/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 77s 1s/step - accuracy: 1.0000 - loss: 7.7386e-06 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 Epoch 21/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 3.5452e-06 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 Epoch 22/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 165s 3s/step - accuracy: 1.0000 - loss: 2.6283e-06 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 Epoch 23/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 2.1228e-06 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 Epoch 24/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 1.0000 - loss: 1.5144e-06 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 Epoch 25/30 53/53 ━━━━━━━━━━━━━━━━━━━━ 75s 1s/step - accuracy: 1.0000 - loss: 4.4546e-05 - val_accuracy: 1.0000 - val_loss: 0.0000e+00 246/246 ━━━━━━━━━━━━━━━━━━━━ 24s 68ms/step - accuracy: 1.0000 - loss: 0.0000e+00 ✅ Test Accuracy: 1.0000 246/246 ━━━━━━━━━━━━━━━━━━━━ 19s 65ms/step precision recall f1-score support Glioma 1.00 1.00 1.00 246 accuracy 1.00 246 macro avg 1.00 1.00 1.00 246 weighted avg 1.00 1.00 1.00 246
C:\Users\Rahila\anaconda3\Lib\site-packages\sklearn\metrics\_classification.py:409: UserWarning: A single label was found in 'y_true' and 'y_pred'. For the confusion matrix to have the correct shape, use the 'labels' parameter to pass all known labels. warnings.warn(
[ ]:
Common Tools
No metadata.
Advanced Tools
No metadata.
Anaconda Assistant
AI-powered coding, insights and debugging in your notebooks.
To enable the following extensions, create an account or sign in.
- Anaconda Assistant4.1.0
- Coming soon!
- Data Catalogs
- Panel Deployments
- Sharing
Already have an account? Sign In
For more information, read our Anaconda Assistant documentation.
![Python [conda env:base] *](./Brain_Tumor_files/logo-64x64.png)